In [1]:
import tensorflow as tf 
import pandas as pd 
import numpy 
import matplotlib.pyplot as plt 

% matplotlib inline
plt.style.use('ggplot')

In [2]:
filename = 'tsco_clean_wide.csv'
data = pd.read_csv(filename)
data = data.where(data['Open'] >0)
data = data.where(data['High'] >0)
data = data.where(data['Low'] >0)
data = data.where(data['Close'] >0)
data = data.where(data['Volume'] >0)
#
data = data.where(data['Open_m1'] >0)
data = data.where(data['High_m1'] >0)
data = data.where(data['Low_m1'] >0)
data = data.where(data['Close_m1'] >0)
data = data.where(data['Volume_m1'] >0)
#
data = data.where(data['Open_m2'] >0)
data = data.where(data['High_m2'] >0)
data = data.where(data['Low_m2'] >0)
data = data.where(data['Close_m2'] >0)
data = data.where(data['Volume_m2'] >0)

#data = data.where(data['Volume'] != '-')
#data = data.where(data['High'] != '-')
data = data.dropna(how='any')
data.head()
#data = data.astype(float)


Out[2]:
Date next_close_is_high next_close_is_low Open High Low Close Volume Open_m1 High_m1 Low_m1 Close_m1 Volume_m1 Open_m2 High_m2 Low_m2 Close_m2 Volume_m2
1 30-Mar-17 0.0 1.0 189.35 192.15 188.20 189.00 17279065.0 191.90 191.90 189.22 190.25 16366596.0 190.00 192.00 188.00 191.20 21600784.0
2 29-Mar-17 0.0 1.0 191.90 191.90 189.22 190.25 16366596.0 190.00 192.00 188.00 191.20 21600784.0 189.75 190.55 187.55 189.95 12319546.0
3 28-Mar-17 0.0 1.0 190.00 192.00 188.00 191.20 21600784.0 189.75 190.55 187.55 189.95 12319546.0 190.20 191.87 187.86 191.00 12362046.0
4 27-Mar-17 1.0 0.0 189.75 190.55 187.55 189.95 12319546.0 190.20 191.87 187.86 191.00 12362046.0 187.95 190.35 186.01 189.60 19199245.0
5 24-Mar-17 0.0 1.0 190.20 191.87 187.86 191.00 12362046.0 187.95 190.35 186.01 189.60 19199245.0 188.00 188.75 185.55 185.85 17722411.0

In [3]:
#data.iloc[868]
#data.drop('Date', 1, inplace=True)
#data.drop('Date_m1', 1, inplace=True)
#data = data.apply(pd.to_numeric)
#data.iloc[:, 1:8] = data.iloc[:, 1:8].apply(pd.to_numeric)
data.describe()
#data.columns

#data.where(data['Volume'] == '-').dropna(how='all')


Out[3]:
next_close_is_high next_close_is_low Open High Low Close Volume Open_m1 High_m1 Low_m1 Close_m1 Volume_m1 Open_m2 High_m2 Low_m2 Close_m2 Volume_m2
count 3967.000000 3967.000000 3967.000000 3967.000000 3967.000000 3967.000000 3.967000e+03 3967.000000 3967.000000 3967.000000 3967.000000 3.967000e+03 3967.000000 3967.000000 3967.000000 3967.000000 3.967000e+03
mean 0.487522 0.491807 309.606191 312.848621 305.938041 309.392521 2.879238e+07 309.621770 312.863043 305.952153 309.409410 2.880729e+07 309.644495 312.883544 305.972049 309.426426 2.912505e+07
std 0.499907 0.499996 85.514942 86.043057 84.957419 85.468470 4.332879e+07 85.504804 86.031054 84.944907 85.456745 4.338259e+07 85.498260 86.024138 84.936351 85.446820 4.765438e+07
min 0.000000 0.000000 138.600000 139.900000 137.000000 139.200000 8.067000e+03 138.600000 139.900000 137.000000 139.200000 8.067000e+03 138.600000 139.900000 137.000000 139.200000 8.067000e+03
25% 0.000000 0.000000 240.000000 242.750000 236.795000 240.000000 1.532257e+07 240.000000 242.775000 236.920000 240.000000 1.532257e+07 240.000000 242.830000 237.000000 240.000000 1.534273e+07
50% 0.000000 0.000000 318.500000 321.000000 315.300000 318.150000 2.313666e+07 318.500000 321.000000 315.300000 318.150000 2.316123e+07 318.500000 321.000000 315.300000 318.150000 2.318196e+07
75% 1.000000 1.000000 381.075000 385.350000 376.200000 381.050000 3.531167e+07 381.000000 385.350000 376.200000 381.000000 3.532838e+07 381.000000 385.350000 376.250000 381.000000 3.533912e+07
max 1.000000 1.000000 493.000000 494.250000 485.000000 492.000000 1.275697e+09 493.000000 494.250000 485.000000 492.000000 1.275697e+09 493.000000 494.250000 485.000000 492.000000 1.275697e+09

In [4]:
data.plot(x='Date', y='Close')


Out[4]:
<matplotlib.axes._subplots.AxesSubplot at 0x7f9aacd97b00>

In [5]:
predictor = data.iloc[:, 3:18]
predictor.head()


Out[5]:
Open High Low Close Volume Open_m1 High_m1 Low_m1 Close_m1 Volume_m1 Open_m2 High_m2 Low_m2 Close_m2 Volume_m2
1 189.35 192.15 188.20 189.00 17279065.0 191.90 191.90 189.22 190.25 16366596.0 190.00 192.00 188.00 191.20 21600784.0
2 191.90 191.90 189.22 190.25 16366596.0 190.00 192.00 188.00 191.20 21600784.0 189.75 190.55 187.55 189.95 12319546.0
3 190.00 192.00 188.00 191.20 21600784.0 189.75 190.55 187.55 189.95 12319546.0 190.20 191.87 187.86 191.00 12362046.0
4 189.75 190.55 187.55 189.95 12319546.0 190.20 191.87 187.86 191.00 12362046.0 187.95 190.35 186.01 189.60 19199245.0
5 190.20 191.87 187.86 191.00 12362046.0 187.95 190.35 186.01 189.60 19199245.0 188.00 188.75 185.55 185.85 17722411.0

In [6]:
target = data.iloc[:, 1:3]
target.head()


Out[6]:
next_close_is_high next_close_is_low
1 0.0 1.0
2 0.0 1.0
3 0.0 1.0
4 1.0 0.0
5 0.0 1.0

In [7]:
test_ratio = 0.1
print('Total No. of Rows: {}'.format(len(target)))
test_size = int(test_ratio*len(target))
print('Test Set Size: {}' .format(test_size))

test_x = predictor.iloc[:test_size]
test_y = target.iloc[:test_size]

train_x = predictor.iloc[test_size:]
train_y = target.iloc[test_size:]


Total No. of Rows: 3967
Test Set Size: 396

No Normalisation because quick and dirty


In [8]:
# Layer definition
def fc_layer(x, channels_in, channels_out, activation = None, name = 'fc'):
    with tf.name_scope(name):
        W = tf.Variable(tf.truncated_normal(shape=[channels_in, channels_out],stddev=0.01),name = 'W')
        b = tf.Variable(tf.constant(0.1, shape=[channels_out]), name = 'b')
        
        if(activation):
            return activation(tf.matmul(x, W) + b)
        else:
            return tf.matmul(x, W) + b

In [11]:
# Main Model
n_input = 15
n_output = 2
n_hidden = 128

learning_rate = 1e-4
epoch = 10
epoch_size = 100
n_iter = epoch * epoch_size


# Create / Reset the graph 
tf.reset_default_graph()

x = tf.placeholder(tf.float32, shape=[None, n_input])
y_ = tf.placeholder(tf.float32, shape=[None, n_output])
keep_rate = tf.placeholder(tf.float32)

h1 = fc_layer(x, n_input, n_hidden, tf.nn.relu, 'fc_layer_1')
h2 = fc_layer(h1, n_hidden, n_hidden, tf.nn.relu, 'fc_layer_2')
h3 = fc_layer(h2, n_hidden, n_hidden, tf.nn.relu, 'fc_layer_3')
h_dropped = tf.nn.dropout(h3, keep_rate)
h0 = fc_layer(h_dropped, n_hidden, n_output, activation=None, name='output_layer')

hans = tf.nn.softmax(h0)

cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=h0))

correct_prediction = tf.equal(tf.argmax(h0,1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

optimiser = tf.train.AdadeltaOptimizer(learning_rate).minimize(cross_entropy)

In [12]:
%%time
# Training 
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()

for it in range(n_iter+1):
    # No mini batch because dirty 
    optimiser.run(feed_dict={x:train_x.values, y_:train_y.values, keep_rate:0.5})
    
    if it % epoch_size ==0:
        print('Epoch: {:2} cross_entropy: {:6f} accuracy: {:4.3f}%'.format(int(it/epoch_size),
                                                    cross_entropy.eval(feed_dict={x:train_x.values, y_:train_y.values, keep_rate:0.5}),
                                                    100*accuracy.eval(feed_dict={x:train_x.values, y_:train_y.values, keep_rate:0.5})))

print('Test Accuracy: {}' .format(accuracy.eval(feed_dict={x:test_x.values, y_:test_y.values, keep_rate:1.0})))


Epoch:  0 cross_entropy: 181.478821 accuracy: 48.250%
Epoch:  1 cross_entropy: 186.529419 accuracy: 48.950%
Epoch:  2 cross_entropy: 170.863525 accuracy: 48.922%
Epoch:  3 cross_entropy: 186.000275 accuracy: 47.858%
Epoch:  4 cross_entropy: 168.418167 accuracy: 48.978%
Epoch:  5 cross_entropy: 167.724213 accuracy: 50.042%
Epoch:  6 cross_entropy: 149.158569 accuracy: 49.370%
Epoch:  7 cross_entropy: 138.953293 accuracy: 48.250%
Epoch:  8 cross_entropy: 145.351669 accuracy: 47.914%
Epoch:  9 cross_entropy: 127.405350 accuracy: 50.014%
Epoch: 10 cross_entropy: 125.095612 accuracy: 49.510%
Test Accuracy: 0.5454546213150024
CPU times: user 5.27 s, sys: 736 ms, total: 6 s
Wall time: 3.67 s

In [30]:
ans = hans.eval(feed_dict={x:test_x.values, y_:test_y.values, keep_rate:1.0})
labels = test_y.values

In [31]:
ans


Out[31]:
array([[ 0.30867782,  0.69132215],
       [ 0.31785727,  0.68214273],
       [ 0.26731074,  0.7326892 ],
       [ 0.36020142,  0.63979858],
       [ 0.35974333,  0.64025664],
       [ 0.28986672,  0.71013331],
       [ 0.30427593,  0.69572407],
       [ 0.2706444 ,  0.72935563],
       [ 0.33507839,  0.66492164],
       [ 0.22465675,  0.7753433 ],
       [ 0.31459734,  0.68540269],
       [ 0.30816463,  0.69183534],
       [ 0.33281854,  0.66718149],
       [ 0.2719191 ,  0.72808087],
       [ 0.23752783,  0.76247215],
       [ 0.15632015,  0.84367979],
       [ 0.27402839,  0.72597164],
       [ 0.25122675,  0.74877328],
       [ 0.28022444,  0.7197755 ],
       [ 0.31064487,  0.68935513],
       [ 0.35092944,  0.64907062],
       [ 0.28931287,  0.7106871 ],
       [ 0.2744216 ,  0.72557843],
       [ 0.3468501 ,  0.6531499 ],
       [ 0.33731782,  0.66268224],
       [ 0.35454014,  0.64545983],
       [ 0.30129412,  0.69870591],
       [ 0.25400981,  0.74599016],
       [ 0.37042835,  0.62957168],
       [ 0.34281674,  0.65718329],
       [ 0.3008163 ,  0.69918376],
       [ 0.35965103,  0.64034897],
       [ 0.34707195,  0.65292805],
       [ 0.33047175,  0.66952825],
       [ 0.31798458,  0.68201542],
       [ 0.31036028,  0.68963975],
       [ 0.24346179,  0.75653827],
       [ 0.23117776,  0.76882225],
       [ 0.33965802,  0.66034198],
       [ 0.29918599,  0.70081401],
       [ 0.20972165,  0.79027826],
       [ 0.25983691,  0.74016309],
       [ 0.18087667,  0.81912327],
       [ 0.09509102,  0.90490901],
       [ 0.00645972,  0.99354035],
       [ 0.30739725,  0.69260275],
       [ 0.21462516,  0.78537488],
       [ 0.25518087,  0.7448191 ],
       [ 0.27328062,  0.72671944],
       [ 0.1951417 ,  0.80485827],
       [ 0.31335619,  0.68664384],
       [ 0.28341013,  0.71658987],
       [ 0.26727304,  0.73272699],
       [ 0.33613226,  0.66386771],
       [ 0.30828103,  0.69171894],
       [ 0.26632774,  0.73367232],
       [ 0.20356914,  0.79643089],
       [ 0.16985324,  0.83014673],
       [ 0.32776526,  0.67223477],
       [ 0.27431092,  0.72568899],
       [ 0.28109035,  0.71890962],
       [ 0.33789405,  0.66210592],
       [ 0.29778168,  0.70221829],
       [ 0.42010432,  0.57989562],
       [ 0.41259107,  0.5874089 ],
       [ 0.38514969,  0.61485028],
       [ 0.44439945,  0.55560052],
       [ 0.37401956,  0.62598044],
       [ 0.37219855,  0.62780148],
       [ 0.32688975,  0.67311025],
       [ 0.28522992,  0.71477014],
       [ 0.21875586,  0.78124422],
       [ 0.27480507,  0.72519493],
       [ 0.27593189,  0.72406805],
       [ 0.20427403,  0.79572594],
       [ 0.29791421,  0.70208579],
       [ 0.35575086,  0.64424914],
       [ 0.28048393,  0.7195161 ],
       [ 0.27692667,  0.7230733 ],
       [ 0.32459825,  0.67540169],
       [ 0.31205678,  0.68794322],
       [ 0.35535237,  0.6446476 ],
       [ 0.29703316,  0.70296675],
       [ 0.27205721,  0.72794282],
       [ 0.32212898,  0.67787105],
       [ 0.35556501,  0.64443499],
       [ 0.39691618,  0.60308385],
       [ 0.33744654,  0.66255349],
       [ 0.28097537,  0.71902472],
       [ 0.25885773,  0.74114221],
       [ 0.38017792,  0.61982208],
       [ 0.3495028 ,  0.65049726],
       [ 0.31892046,  0.68107951],
       [ 0.21915534,  0.78084469],
       [ 0.16537941,  0.83462059],
       [ 0.22748581,  0.77251416],
       [ 0.28579646,  0.7142036 ],
       [ 0.19231109,  0.80768883],
       [ 0.20311889,  0.79688108],
       [ 0.29993221,  0.70006782],
       [ 0.19070093,  0.80929905],
       [ 0.26905137,  0.73094869],
       [ 0.25515926,  0.74484074],
       [ 0.30920115,  0.69079882],
       [ 0.31411588,  0.68588418],
       [ 0.29871792,  0.70128202],
       [ 0.33737633,  0.6626237 ],
       [ 0.29322532,  0.70677471],
       [ 0.27813587,  0.7218641 ],
       [ 0.27685854,  0.72314143],
       [ 0.37007371,  0.62992632],
       [ 0.33519706,  0.66480297],
       [ 0.30748096,  0.69251907],
       [ 0.22649443,  0.77350551],
       [ 0.23829481,  0.76170516],
       [ 0.28756648,  0.71243358],
       [ 0.20218708,  0.79781294],
       [ 0.22750217,  0.77249783],
       [ 0.3122817 ,  0.68771827],
       [ 0.31982037,  0.6801796 ],
       [ 0.31394881,  0.68605119],
       [ 0.2583811 ,  0.74161893],
       [ 0.23672   ,  0.76328003],
       [ 0.02141974,  0.97858024],
       [ 0.14070652,  0.85929352],
       [ 0.28421667,  0.71578336],
       [ 0.30918539,  0.69081461],
       [ 0.35730749,  0.64269251],
       [ 0.33369374,  0.66630626],
       [ 0.24492539,  0.75507456],
       [ 0.263888  ,  0.736112  ],
       [ 0.39761388,  0.60238618],
       [ 0.33532962,  0.66467035],
       [ 0.34974113,  0.6502589 ],
       [ 0.23603256,  0.76396739],
       [ 0.29789692,  0.70210308],
       [ 0.22550605,  0.77449399],
       [ 0.19867559,  0.80132443],
       [ 0.31583923,  0.68416077],
       [ 0.28413025,  0.71586978],
       [ 0.31720755,  0.68279248],
       [ 0.1833587 ,  0.81664127],
       [ 0.29882431,  0.70117569],
       [ 0.34973013,  0.65026993],
       [ 0.32566896,  0.67433107],
       [ 0.31504434,  0.68495566],
       [ 0.22636904,  0.77363104],
       [ 0.22665301,  0.77334702],
       [ 0.1989906 ,  0.80100942],
       [ 0.3694022 ,  0.63059783],
       [ 0.35590702,  0.64409298],
       [ 0.36812708,  0.63187295],
       [ 0.32925928,  0.67074072],
       [ 0.27945212,  0.72054791],
       [ 0.3786481 ,  0.62135184],
       [ 0.38026831,  0.61973172],
       [ 0.27502388,  0.72497618],
       [ 0.33078286,  0.66921711],
       [ 0.29571831,  0.70428175],
       [ 0.35254368,  0.64745635],
       [ 0.29101217,  0.70898783],
       [ 0.28079605,  0.71920395],
       [ 0.31869191,  0.68130809],
       [ 0.28454447,  0.71545559],
       [ 0.29235524,  0.70764476],
       [ 0.32103062,  0.67896938],
       [ 0.30775294,  0.69224709],
       [ 0.34292975,  0.65707022],
       [ 0.28933248,  0.71066749],
       [ 0.3396979 ,  0.6603021 ],
       [ 0.29015201,  0.70984799],
       [ 0.25787103,  0.74212897],
       [ 0.22952257,  0.77047747],
       [ 0.26384667,  0.73615336],
       [ 0.35002378,  0.64997619],
       [ 0.35359725,  0.64640278],
       [ 0.24122885,  0.75877112],
       [ 0.30583572,  0.69416434],
       [ 0.32363757,  0.67636245],
       [ 0.33971   ,  0.66029   ],
       [ 0.25214392,  0.74785614],
       [ 0.29239002,  0.70760989],
       [ 0.30917284,  0.69082719],
       [ 0.26180348,  0.73819649],
       [ 0.25507596,  0.74492407],
       [ 0.23539315,  0.76460683],
       [ 0.24859692,  0.75140303],
       [ 0.08925218,  0.91074789],
       [ 0.25962526,  0.74037474],
       [ 0.32405281,  0.67594719],
       [ 0.31695455,  0.68304545],
       [ 0.17145287,  0.82854712],
       [ 0.17042233,  0.82957768],
       [ 0.28209838,  0.71790165],
       [ 0.17232476,  0.82767528],
       [ 0.07620254,  0.92379749],
       [ 0.27053282,  0.72946721],
       [ 0.29556528,  0.70443469],
       [ 0.27447289,  0.72552717],
       [ 0.27911356,  0.72088641],
       [ 0.20073369,  0.79926628],
       [ 0.22892682,  0.77107322],
       [ 0.19561309,  0.80438685],
       [ 0.21903831,  0.78096169],
       [ 0.19197884,  0.80802113],
       [ 0.26969129,  0.73030871],
       [ 0.2867572 ,  0.71324283],
       [ 0.20730816,  0.79269189],
       [ 0.19338769,  0.80661225],
       [ 0.19581367,  0.8041864 ],
       [ 0.20524843,  0.79475158],
       [ 0.2460102 ,  0.75398982],
       [ 0.26611483,  0.73388511],
       [ 0.26858741,  0.73141259],
       [ 0.35664979,  0.64335018],
       [ 0.34443566,  0.65556437],
       [ 0.23216695,  0.76783299],
       [ 0.20128563,  0.7987144 ],
       [ 0.25634938,  0.74365062],
       [ 0.26197192,  0.73802799],
       [ 0.24636722,  0.75363278],
       [ 0.2754958 ,  0.72450417],
       [ 0.25888887,  0.74111104],
       [ 0.2074668 ,  0.79253322],
       [ 0.18620034,  0.81379968],
       [ 0.23104765,  0.76895237],
       [ 0.26772535,  0.73227471],
       [ 0.27640402,  0.72359592],
       [ 0.29397887,  0.70602107],
       [ 0.24852072,  0.75147927],
       [ 0.29157993,  0.7084201 ],
       [ 0.2346316 ,  0.76536846],
       [ 0.30908456,  0.69091547],
       [ 0.29076141,  0.70923859],
       [ 0.29407385,  0.70592618],
       [ 0.15807348,  0.84192652],
       [ 0.33081883,  0.66918123],
       [ 0.29323018,  0.70676982],
       [ 0.31472763,  0.68527234],
       [ 0.28358999,  0.71640998],
       [ 0.3213056 ,  0.67869443],
       [ 0.29847887,  0.70152116],
       [ 0.32003814,  0.67996186],
       [ 0.26996717,  0.73003274],
       [ 0.15921392,  0.84078604],
       [ 0.03308608,  0.96691394],
       [ 0.29260814,  0.7073918 ],
       [ 0.34970957,  0.65029037],
       [ 0.3589834 ,  0.6410166 ],
       [ 0.27355996,  0.72644007],
       [ 0.33122265,  0.66877735],
       [ 0.26537958,  0.73462045],
       [ 0.35687494,  0.64312506],
       [ 0.25540832,  0.74459171],
       [ 0.32981205,  0.67018789],
       [ 0.31600648,  0.68399346],
       [ 0.30259845,  0.69740152],
       [ 0.26896781,  0.73103225],
       [ 0.31124702,  0.68875301],
       [ 0.24957772,  0.75042224],
       [ 0.30865207,  0.6913479 ],
       [ 0.21942283,  0.78057712],
       [ 0.30821022,  0.69178975],
       [ 0.38083553,  0.61916447],
       [ 0.35912433,  0.64087564],
       [ 0.31490776,  0.68509227],
       [ 0.30388191,  0.69611812],
       [ 0.19858244,  0.80141759],
       [ 0.28351158,  0.71648836],
       [ 0.19608143,  0.80391854],
       [ 0.34207091,  0.65792912],
       [ 0.27868858,  0.72131139],
       [ 0.28444159,  0.71555835],
       [ 0.31713036,  0.68286967],
       [ 0.27292785,  0.72707212],
       [ 0.22403255,  0.77596742],
       [ 0.31513098,  0.68486905],
       [ 0.29999116,  0.70000887],
       [ 0.31558228,  0.68441772],
       [ 0.27619854,  0.72380149],
       [ 0.34126496,  0.65873504],
       [ 0.28347132,  0.71652865],
       [ 0.26457322,  0.73542678],
       [ 0.18711895,  0.81288105],
       [ 0.23958808,  0.76041192],
       [ 0.27773127,  0.7222687 ],
       [ 0.19413081,  0.80586916],
       [ 0.12068577,  0.87931424],
       [ 0.11760265,  0.88239741],
       [ 0.17710473,  0.82289523],
       [ 0.19707768,  0.80292231],
       [ 0.18312177,  0.8168782 ],
       [ 0.27844134,  0.72155869],
       [ 0.26012808,  0.73987186],
       [ 0.22585675,  0.77414322],
       [ 0.18487637,  0.81512356],
       [ 0.08432887,  0.91567117],
       [ 0.20804766,  0.79195237],
       [ 0.28480282,  0.71519721],
       [ 0.23388426,  0.76611573],
       [ 0.27647927,  0.72352076],
       [ 0.25418729,  0.74581277],
       [ 0.23522285,  0.76477712],
       [ 0.21693739,  0.78306258],
       [ 0.19910111,  0.80089891],
       [ 0.26600155,  0.73399842],
       [ 0.13951524,  0.86048478],
       [ 0.0779162 ,  0.92208385],
       [ 0.16869478,  0.83130521],
       [ 0.15170196,  0.84829801],
       [ 0.28030217,  0.71969783],
       [ 0.16840015,  0.83159983],
       [ 0.20624028,  0.7937597 ],
       [ 0.22631681,  0.77368319],
       [ 0.19500189,  0.80499816],
       [ 0.25648785,  0.74351215],
       [ 0.44382545,  0.55617458],
       [ 0.38928041,  0.61071962],
       [ 0.33315381,  0.66684616],
       [ 0.47166458,  0.52833545],
       [ 0.30469543,  0.69530457],
       [ 0.36050764,  0.63949227],
       [ 0.31683382,  0.68316615],
       [ 0.16192235,  0.8380776 ],
       [ 0.25293353,  0.7470665 ],
       [ 0.22446242,  0.77553755],
       [ 0.24134915,  0.75865084],
       [ 0.23573802,  0.76426196],
       [ 0.26239681,  0.73760313],
       [ 0.23384699,  0.76615304],
       [ 0.18111289,  0.81888705],
       [ 0.26516649,  0.73483348],
       [ 0.27633014,  0.72366983],
       [ 0.25228065,  0.74771935],
       [ 0.3528091 ,  0.64719087],
       [ 0.30055955,  0.69944042],
       [ 0.21010131,  0.78989869],
       [ 0.32531261,  0.67468739],
       [ 0.36889634,  0.63110363],
       [ 0.30830985,  0.69169009],
       [ 0.24740325,  0.75259674],
       [ 0.20174585,  0.79825407],
       [ 0.27509195,  0.72490805],
       [ 0.31216782,  0.68783212],
       [ 0.26720071,  0.73279929],
       [ 0.17918001,  0.82081997],
       [ 0.31181979,  0.68818015],
       [ 0.24598023,  0.7540198 ],
       [ 0.2723361 ,  0.72766393],
       [ 0.22437425,  0.77562577],
       [ 0.21903604,  0.78096396],
       [ 0.34227923,  0.65772074],
       [ 0.36601773,  0.6339823 ],
       [ 0.32125112,  0.67874891],
       [ 0.33358946,  0.66641057],
       [ 0.39375824,  0.60624182],
       [ 0.39650491,  0.60349512],
       [ 0.31897211,  0.68102789],
       [ 0.34682593,  0.6531741 ],
       [ 0.34630439,  0.65369564],
       [ 0.33544841,  0.66455156],
       [ 0.39169273,  0.60830736],
       [ 0.32014742,  0.6798526 ],
       [ 0.2619206 ,  0.73807937],
       [ 0.30693781,  0.69306219],
       [ 0.25734091,  0.74265909],
       [ 0.3428067 ,  0.6571933 ],
       [ 0.32857585,  0.67142421],
       [ 0.31719711,  0.68280292],
       [ 0.33324802,  0.66675198],
       [ 0.25092828,  0.74907166],
       [ 0.31924218,  0.68075788],
       [ 0.28487238,  0.71512765],
       [ 0.2195818 ,  0.78041822],
       [ 0.19732696,  0.8026731 ],
       [ 0.2697396 ,  0.73026043],
       [ 0.24290922,  0.75709075],
       [ 0.31890565,  0.68109435],
       [ 0.26243436,  0.7375657 ],
       [ 0.183778  ,  0.81622201],
       [ 0.21228613,  0.78771389],
       [ 0.31906426,  0.68093574],
       [ 0.26334712,  0.73665285],
       [ 0.25844714,  0.74155289],
       [ 0.33969644,  0.66030353],
       [ 0.19213088,  0.80786914],
       [ 0.38343689,  0.61656314],
       [ 0.2630049 ,  0.7369951 ],
       [ 0.31813514,  0.68186486],
       [ 0.28059867,  0.71940136],
       [ 0.2656534 ,  0.73434663],
       [ 0.29685664,  0.70314336],
       [ 0.35822996,  0.64177001],
       [ 0.27123243,  0.72876757],
       [ 0.34888643,  0.65111363],
       [ 0.27121195,  0.72878808],
       [ 0.30680999,  0.69319004],
       [ 0.28800157,  0.71199846]], dtype=float32)

In [ ]: